library(rstan)
library(survival)
library(tidyverse)
library(tidybayes)
library(scales)
library(survminer)
Stan_exponential_survival_model <- "
data{
  int <lower=1> N_uncensored;
  int <lower=1> N_censored;
  int <lower=0> numCovariates;
  matrix[N_censored, numCovariates] X_censored;
  matrix[N_uncensored, numCovariates] X_uncensored;
  vector <lower=0>[N_censored] times_censored;
  vector <lower=0>[N_uncensored] times_uncensored;
}

parameters{
  vector[numCovariates] beta; //regression coefficients
  real alpha; //intercept
}

model{
  beta ~ normal(0,10); //prior on regression coefficients
  alpha ~ normal(0,10); //prior on intercept
  target += exponential_lpdf(times_uncensored | exp(alpha+X_uncensored * beta)); //log-likelihood part for uncensored times
  target += exponential_lccdf(times_censored | exp(alpha+X_censored * beta)); //log-likelihood for censored times
}

generated quantities{
  vector[N_uncensored] times_uncensored_sampled; //prediction of death
  for(i in 1:N_uncensored) {
    times_uncensored_sampled[i] = exponential_rng(exp(alpha+X_uncensored[i,]* beta));
  }
}
"
# prepare the data
set.seed(42); 
require (tidyverse);
data <- read_csv('../data/necessary_fields.csv')
Rows: 2066 Columns: 7
── Column specification ────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────
Delimiter: ","
chr (1): host_type
dbl (1): duration_months
lgl (5): major_releases, censored, high_rev_freq, multi_repo, high_author_count

ℹ Use `spec()` to retrieve the full column specification for this data.
ℹ Specify the column types or set `show_col_types = FALSE` to quiet this message.
N <- nrow (data);
data$major_releases <- car::recode(data$major_releases, "'TRUE' = 0; 'FALSE' = 1")
data$censored <- car::recode(data$censored, "'TRUE' = 0; 'FALSE' = 1")
X <- as.matrix(pull(data, major_releases)); 
is_censored <- pull(data, censored)== 0; 
times <- pull(data, duration_months); 
msk_censored <- is_censored == 1; 
N_censored <- sum(msk_censored);
Stan_data <- list (N_uncensored = N - N_censored, 
                    N_censored = N_censored,
                    numCovariates = ncol(X), 
                    X_censored = as.matrix(X[msk_censored,]),
                    X_uncensored = as.matrix(X[!msk_censored ,]), 
                    times_censored = times[msk_censored],
                    times_uncensored = times[!msk_censored])
# Fit Stan model
require(rstan)
exp_surv_model_fit <- suppressMessages(stan(model_code = Stan_exponential_survival_model, data = Stan_data))
sh: Data/bayesian: No such file or directory
sh: clang++ -mmacosx-version-min=10.13: command not found
Warning in system2(CXX, args = ARGS) : error in running command
Warning in file.remove(c(unprocessed, processed)) :
  cannot remove file '/var/folders/q8/7tchbyvd1dj3hkgw5ffkk6ph0000gp/T//RtmpHtsCUK/file985c8a5369f.stan', reason 'No such file or directory'

SAMPLING FOR MODEL 'bf5dbbde6a245330de71a285e3fe7c42' NOW (CHAIN 1).
Chain 1: 
Chain 1: Gradient evaluation took 0.000286 seconds
Chain 1: 1000 transitions using 10 leapfrog steps per transition would take 2.86 seconds.
Chain 1: Adjust your expectations accordingly!
Chain 1: 
Chain 1: 
Chain 1: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 1: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 1: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 1: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 1: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 1: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 1: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 1: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 1: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 1: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 1: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 1: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 1: 
Chain 1:  Elapsed Time: 2.17515 seconds (Warm-up)
Chain 1:                2.28051 seconds (Sampling)
Chain 1:                4.45566 seconds (Total)
Chain 1: 

SAMPLING FOR MODEL 'bf5dbbde6a245330de71a285e3fe7c42' NOW (CHAIN 2).
Chain 2: 
Chain 2: Gradient evaluation took 0.000184 seconds
Chain 2: 1000 transitions using 10 leapfrog steps per transition would take 1.84 seconds.
Chain 2: Adjust your expectations accordingly!
Chain 2: 
Chain 2: 
Chain 2: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 2: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 2: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 2: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 2: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 2: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 2: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 2: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 2: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 2: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 2: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 2: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 2: 
Chain 2:  Elapsed Time: 2.213 seconds (Warm-up)
Chain 2:                2.08343 seconds (Sampling)
Chain 2:                4.29643 seconds (Total)
Chain 2: 

SAMPLING FOR MODEL 'bf5dbbde6a245330de71a285e3fe7c42' NOW (CHAIN 3).
Chain 3: 
Chain 3: Gradient evaluation took 0.000184 seconds
Chain 3: 1000 transitions using 10 leapfrog steps per transition would take 1.84 seconds.
Chain 3: Adjust your expectations accordingly!
Chain 3: 
Chain 3: 
Chain 3: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 3: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 3: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 3: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 3: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 3: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 3: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 3: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 3: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 3: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 3: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 3: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 3: 
Chain 3:  Elapsed Time: 2.0885 seconds (Warm-up)
Chain 3:                2.16783 seconds (Sampling)
Chain 3:                4.25633 seconds (Total)
Chain 3: 

SAMPLING FOR MODEL 'bf5dbbde6a245330de71a285e3fe7c42' NOW (CHAIN 4).
Chain 4: 
Chain 4: Gradient evaluation took 0.0002 seconds
Chain 4: 1000 transitions using 10 leapfrog steps per transition would take 2 seconds.
Chain 4: Adjust your expectations accordingly!
Chain 4: 
Chain 4: 
Chain 4: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 4: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 4: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 4: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 4: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 4: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 4: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 4: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 4: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 4: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 4: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 4: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 4: 
Chain 4:  Elapsed Time: 2.30938 seconds (Warm-up)
Chain 4:                2.11088 seconds (Sampling)
Chain 4:                4.42026 seconds (Total)
Chain 4: 
print(get_seed(exp_surv_model_fit))
[1] 1781592037
fit_summary <- summary(exp_surv_model_fit)
print(fit_summary$summary)
                                      mean     se_mean          sd         2.5%           25%          50%          75%        97.5%     n_eff      Rhat
beta[1]                           1.068763 0.005816100   0.1444464     0.790465     0.9761651     1.067160     1.158581     1.373116  616.8074 1.0071725
alpha                            -5.773486 0.005579162   0.1389656    -6.062658    -5.8602008    -5.772233    -5.682895    -5.509260  620.4070 1.0076445
times_uncensored_sampled[1]     110.243140 1.736177866 110.5102633     3.004338    31.2820284    76.183068   154.169910   398.796930 4051.5089 1.0002822
times_uncensored_sampled[2]     108.947259 1.726552927 108.4249757     2.942676    31.9812592    77.102353   150.357224   401.529278 3943.6546 0.9997612
times_uncensored_sampled[3]     113.426420 1.849959973 112.2518639     2.877944    34.1400552    79.347520   156.244317   414.561044 3681.8192 0.9993805
times_uncensored_sampled[4]     111.518056 1.753087951 110.7728501     2.541087    31.9003489    77.392601   155.248452   404.519113 3992.6317 0.9995715
times_uncensored_sampled[5]     110.489787 1.734258261 109.5692159     2.956307    32.8630605    76.205704   152.836729   406.652097 3991.6234 0.9996255
times_uncensored_sampled[6]     111.379860 1.721726409 110.9651164     2.747596    31.9850964    77.994357   154.640477   406.163409 4153.7912 0.9995824
times_uncensored_sampled[7]     110.303823 1.760817980 110.4861524     2.892333    32.1325146    76.248466   151.848846   401.235327 3937.1936 1.0006718
times_uncensored_sampled[8]     110.140183 1.783837678 107.5528470     3.279413    33.6761498    77.219346   154.925368   397.697080 3635.2406 0.9995725
times_uncensored_sampled[9]     109.168923 1.809698699 109.9572607     2.895837    30.9878137    75.001860   150.497153   406.308839 3691.7754 1.0003221
times_uncensored_sampled[10]    110.872299 1.783161430 111.2854160     2.691868    30.8370041    78.032111   154.346484   406.452319 3894.8899 0.9992723
times_uncensored_sampled[11]    108.614731 1.714965970 109.1200621     2.739307    31.6455188    72.797807   151.442040   404.740169 4048.5378 0.9998170
times_uncensored_sampled[12]    107.751061 1.717535935 108.0197067     2.284966    31.0340227    75.589581   148.293648   402.546166 3955.4356 0.9993178
times_uncensored_sampled[13]    110.446129 1.765959811 111.8814696     3.155630    32.1957111    75.587362   152.199114   401.397368 4013.7904 0.9998564
times_uncensored_sampled[14]    109.851551 1.684822463 107.5349108     2.461176    32.4178990    76.389589   153.817899   411.342804 4073.7153 0.9993718
times_uncensored_sampled[15]    109.857209 1.778466944 110.6395773     2.679260    30.9008183    77.259022   152.222422   411.104903 3870.1645 1.0002699
times_uncensored_sampled[16]    111.521543 1.775551304 113.4907789     3.076941    31.0764767    77.529074   154.576551   413.689984 4085.5893 0.9996635
times_uncensored_sampled[17]    109.533705 1.753209617 108.8983408     2.792509    30.4546584    74.032467   153.172218   398.911966 3858.1121 0.9999021
times_uncensored_sampled[18]    110.536137 1.742304264 108.1530566     2.778019    33.1963248    78.437690   154.113070   391.458787 3853.2714 1.0003456
times_uncensored_sampled[19]    112.105289 1.862677732 114.6761120     2.665644    31.1198078    75.347735   153.828943   417.337405 3790.2729 1.0004675
times_uncensored_sampled[20]    111.033085 1.726208336 109.4573344     3.049620    33.3502072    78.520206   152.344597   395.436750 4020.7152 1.0000035
times_uncensored_sampled[21]    112.808355 1.777414708 112.5051853     2.686877    31.7530067    77.217544   157.903343   401.771585 4006.5221 1.0010791
times_uncensored_sampled[22]    107.263882 1.728249939 105.4537221     2.796784    29.9890421    75.324604   150.604945   390.356054 3723.1516 1.0016366
times_uncensored_sampled[23]    107.765321 1.664512886 107.0343802     2.651122    30.7192600    76.299133   147.507777   403.303651 4134.9692 0.9993020
times_uncensored_sampled[24]    112.200410 1.885444569 116.9103943     2.601950    30.4701118    73.913592   153.237883   431.050720 3844.8439 1.0005974
times_uncensored_sampled[25]    110.411238 1.694177390 110.3801867     2.932642    32.1849003    77.628451   151.706144   409.119324 4244.8709 0.9998554
times_uncensored_sampled[26]    108.115141 1.752860761 108.3880593     2.603952    31.9384291    74.843911   147.521999   399.156622 3823.5612 1.0005519
times_uncensored_sampled[27]    113.482701 1.802322905 114.6432290     2.689389    33.1227817    79.116924   154.441403   420.695448 4046.0535 0.9998846
times_uncensored_sampled[28]    109.524845 1.721309562 107.6252332     2.797975    33.0563720    76.803375   151.986126   399.006433 3909.4013 1.0003794
times_uncensored_sampled[29]    111.326963 1.710138092 110.8363535     2.746640    30.9949468    76.559484   155.515121   402.621589 4200.5107 0.9995621
times_uncensored_sampled[30]    111.445665 1.735606568 110.9255435     2.594088    32.0396229    76.187800   156.173695   408.121239 4084.7037 0.9996115
times_uncensored_sampled[31]    111.497230 1.804126234 113.9994517     3.006406    32.9165805    76.719974   152.223126   404.430214 3992.7460 0.9999687
times_uncensored_sampled[32]    109.973088 1.808792628 108.1405466     2.715255    31.7573398    77.876527   151.955820   386.602260 3574.3705 0.9999782
times_uncensored_sampled[33]    112.509098 1.820472335 114.8569769     3.357583    32.1200884    75.867538   155.184766   422.560379 3980.5822 0.9999326
times_uncensored_sampled[34]    111.202707 1.812667159 111.9106826     2.813270    31.8166825    76.914664   154.406809   421.565398 3811.5968 0.9995151
times_uncensored_sampled[35]    108.512073 1.737431921 109.1811660     2.733623    31.2872658    76.841191   149.377073   389.790491 3948.9339 1.0006845
times_uncensored_sampled[36]    111.919565 1.829747433 113.3388514     2.447820    31.6047749    75.570446   157.224558   394.496658 3836.8544 0.9994711
times_uncensored_sampled[37]    108.075554 1.681937695 107.9720988     2.744366    31.3584720    74.306837   152.074521   403.119623 4121.0063 0.9997310
times_uncensored_sampled[38]    111.255677 1.777698393 109.1985578     2.993910    33.6250527    77.630468   153.058268   393.383205 3773.2679 0.9998253
times_uncensored_sampled[39]    109.476718 1.754071806 109.3698931     2.895279    31.0399812    77.205963   150.824692   410.695089 3887.7725 0.9996770
times_uncensored_sampled[40]    110.227257 1.759071822 111.8466903     3.227610    30.7768954    76.092770   151.127151   407.947908 4042.7709 1.0000989
times_uncensored_sampled[41]    110.650918 1.786519298 108.0703837     2.647763    31.9845630    77.195985   153.455977   403.500915 3659.2996 1.0002175
times_uncensored_sampled[42]    111.081862 1.785876584 109.0910740     2.814263    32.6088093    77.707271   151.508503   400.600482 3731.4321 0.9997544
times_uncensored_sampled[43]    111.169415 1.753397545 112.1931368     2.925158    31.8670852    76.628954   152.206426   418.865781 4094.2258 0.9993296
times_uncensored_sampled[44]    111.357828 1.757520669 109.4285781     3.112647    33.5807523    78.519074   153.669871   408.806892 3876.6860 0.9999278
times_uncensored_sampled[45]    109.559247 1.753297335 107.7614547     3.041875    32.4959102    76.198042   153.331570   393.768321 3777.5981 1.0001621
times_uncensored_sampled[46]    110.953080 1.725286819 110.0388847     2.327364    32.8589806    78.155389   156.436811   406.841820 4067.8952 1.0004489
times_uncensored_sampled[47]    109.859547 1.727873723 107.8828646     3.400309    32.9383132    77.368959   151.646110   388.054948 3898.3510 0.9998799
times_uncensored_sampled[48]    108.627840 1.740874850 107.8691616     2.735588    30.5364309    74.905215   152.630884   387.862475 3839.3659 0.9998693
times_uncensored_sampled[49]    110.685262 1.765278387 111.0702672     2.681748    32.3622746    74.193306   153.345279   418.138419 3958.8516 1.0005514
times_uncensored_sampled[50]    112.541384 1.837123518 113.3787657     2.718055    30.9911499    77.122071   157.404537   415.370582 3808.7874 1.0006833
times_uncensored_sampled[51]    110.272434 1.772896065 110.4264132     2.842937    30.9161239    76.971293   151.734726   421.025711 3879.5324 0.9992347
times_uncensored_sampled[52]    111.848450 1.697761146 109.8010832     3.086877    33.6873636    79.807905   156.504981   396.017333 4182.7323 0.9993325
times_uncensored_sampled[53]    112.725091 1.849761112 114.2522829     2.687336    32.0840001    78.008117   152.939989   422.030882 3815.0346 1.0007891
times_uncensored_sampled[54]    110.803157 1.777886979 109.5345466     2.945389    33.0859221    77.181682   153.197194   401.643052 3795.7179 1.0002902
times_uncensored_sampled[55]    112.038439 1.833409988 116.2085336     2.640620    31.0358126    76.289710   154.537751   419.398046 4017.5090 0.9999060
times_uncensored_sampled[56]    111.366501 1.727555032 110.0425759     2.430789    32.6863896    77.323847   155.611207   397.449894 4057.4924 1.0001887
times_uncensored_sampled[57]    109.565972 1.716767016 109.6574808     2.546632    33.2929529    75.676839   147.837813   403.100448 4079.9403 0.9995424
times_uncensored_sampled[58]    109.235744 1.710481644 109.0942930     3.236615    30.4756615    76.508599   151.574146   408.095332 4067.8715 0.9999103
times_uncensored_sampled[59]    108.341945 1.707131260 108.2617664     2.262560    29.7650202    74.370331   151.489971   401.234139 4021.7622 0.9998265
times_uncensored_sampled[60]    112.765742 1.831698199 110.9808787     3.000064    32.4876321    79.262133   155.999320   420.843946 3671.0346 0.9993438
times_uncensored_sampled[61]    111.775700 1.742836262 108.2667440     2.872556    33.2700717    79.523216   155.814947   405.672731 3859.0195 0.9996573
times_uncensored_sampled[62]    111.253757 1.760229996 113.0650107     2.968211    30.9837297    76.754047   155.782439   410.464676 4125.8898 1.0004291
times_uncensored_sampled[63]    107.650569 1.670723448 106.0732676     3.135326    32.4372486    74.000563   149.283492   393.576998 4030.9068 0.9998148
times_uncensored_sampled[64]    109.011410 1.890274414 109.6143058     2.707542    32.1352562    75.568832   148.187682   408.938574 3362.6740 1.0001231
times_uncensored_sampled[65]    109.728007 1.683673304 107.6242184     2.908873    32.6687728    76.213867   152.833072   408.392643 4086.0565 1.0013276
times_uncensored_sampled[66]    112.054583 1.843987187 113.7809461     2.791058    31.2815880    77.187049   153.763831   417.078829 3807.3542 1.0000252
times_uncensored_sampled[67]    106.215249 1.658429929 108.3203943     2.534020    30.1993923    72.370475   145.787064   402.276428 4266.0526 1.0007775
times_uncensored_sampled[68]    109.472472 1.757499229 109.3465370     2.739911    31.8817876    77.000758   152.059420   395.438041 3870.9698 0.9998084
times_uncensored_sampled[69]    108.551538 1.748298567 110.4975854     2.530059    30.5667010    75.143227   149.410068   400.685475 3994.6099 0.9998959
times_uncensored_sampled[70]    111.389982 1.765086261 113.0628513     2.923384    31.3929162    78.120198   152.683547   413.300796 4103.0612 0.9996041
times_uncensored_sampled[71]    113.596079 1.808499710 115.8776224     2.959565    32.5851197    78.192680   155.213176   437.488071 4105.4642 0.9992187
times_uncensored_sampled[72]    110.993421 1.774971096 111.4490585     2.783190    30.9916139    75.595963   155.937988   418.715604 3942.4867 0.9999125
times_uncensored_sampled[73]    110.276387 1.768315355 110.4687237     2.419073    31.4446728    74.082837   151.952028   401.143114 3902.6467 1.0007533
times_uncensored_sampled[74]    110.808074 1.709115995 110.9046938     3.353438    31.1568073    77.553202   153.941526   401.650237 4210.7240 0.9992156
times_uncensored_sampled[75]    111.834532 1.758146915 112.2154885     2.606016    30.8782402    75.625542   155.351128   423.501910 4073.7585 1.0001040
times_uncensored_sampled[76]    109.591634 1.734164894 108.6899020     2.801385    32.5992710    77.252232   149.872330   414.351375 3928.2364 0.9999127
times_uncensored_sampled[77]    111.242330 1.736455438 109.1999985     3.364447    33.4448918    77.636131   153.565805   389.837950 3954.7404 0.9997231
times_uncensored_sampled[78]    111.091190 1.701696483 109.4397934     3.016364    33.2078411    75.844864   156.235710   402.941617 4136.0552 0.9995423
times_uncensored_sampled[79]    110.094686 1.701833532 110.1664836     3.080693    31.8439800    77.332826   151.779747   400.018841 4190.4901 1.0004777
times_uncensored_sampled[80]    110.094966 1.742785511 110.4757839     2.589390    31.8827203    76.263607   150.189713   395.335800 4018.3365 1.0000848
times_uncensored_sampled[81]    112.793035 1.934660761 115.7745208     2.697738    32.0473964    76.392437   153.912153   420.885489 3581.0991 1.0008772
times_uncensored_sampled[82]    109.004884 1.682663012 107.3432029     2.735050    32.6224686    76.162589   151.329958   389.878759 4069.6288 0.9999300
times_uncensored_sampled[83]    110.788675 1.745904919 108.0894958     3.456156    33.5805401    78.838787   150.651570   399.774746 3832.8851 0.9994998
times_uncensored_sampled[84]    112.286261 1.830487634 113.3139504     2.848376    30.4893818    77.773177   153.586903   421.913075 3832.0676 1.0004582
times_uncensored_sampled[85]    112.350156 1.730511694 110.3735917     2.912954    32.2518818    79.095121   158.485509   412.099086 4068.0031 0.9996667
times_uncensored_sampled[86]    111.218621 1.752484637 110.5578829     3.051175    31.6281250    77.823080   154.298339   397.592316 3979.8893 1.0015065
times_uncensored_sampled[87]    109.907808 1.762660498 110.3062060     2.733825    31.7911630    77.130368   149.606637   407.074522 3916.1791 0.9999269
times_uncensored_sampled[88]    108.437775 1.670206270 105.8529192     2.355140    33.0811240    76.241310   149.986492   397.164220 4016.6636 0.9994306
times_uncensored_sampled[89]    112.830999 1.770433411 110.1007967     2.774042    33.0003935    79.118217   156.561646   420.140474 3867.4235 0.9997485
times_uncensored_sampled[90]    110.808001 1.793460577 112.2531990     2.676612    31.2623452    76.238769   151.759007   412.320865 3917.5431 1.0010423
times_uncensored_sampled[91]    110.293719 1.740526036 109.1527616     2.791111    31.9783191    77.499408   153.548390   405.953661 3932.8593 0.9995039
times_uncensored_sampled[92]    109.892355 1.747964115 108.1488975     2.666570    32.3559505    78.567681   154.618721   400.186519 3828.0638 0.9996386
times_uncensored_sampled[93]    109.158096 1.814711713 109.9948032     2.834072    31.3697465    75.851135   151.862220   406.112898 3673.9145 1.0000557
times_uncensored_sampled[94]    110.124082 1.799533578 111.5782393     2.774552    31.2918851    75.886862   152.383453   425.478711 3844.4932 0.9999177
times_uncensored_sampled[95]    111.546125 1.744254019 109.9141666     2.807639    32.8612099    78.307635   154.903433   402.434992 3970.8902 1.0005189
times_uncensored_sampled[96]    108.924880 1.774869206 112.0284876     2.488381    31.0961337    74.183634   150.887605   404.857704 3984.0450 0.9997342
times_uncensored_sampled[97]    108.700940 1.815202320 108.7443980     2.350637    31.0534132    75.699706   149.166454   399.958052 3588.9194 1.0013870
times_uncensored_sampled[98]    111.189635 1.777586824 111.9204644     2.679688    30.2355761    76.307471   156.167158   408.759040 3964.2165 1.0002055
 [ reached getOption("max.print") -- omitted 678 rows ]
exp_surv_model_draws <- tidybayes::tidy_draws(exp_surv_model_fit)
exp_surv_model_draws
## Constructor for Strata-specific survival function
construct_survival_function <- function(alpha, beta, x) {
    function(t) {
        lambda <- exp(alpha + x*beta)
        exp(-(lambda * t))
    }
}

## Random functions
exp_surv_model_surv_func <-
    exp_surv_model_draws %>%
    select(.chain, .iteration, .draw, alpha, `beta[1]`) %>%
    ## Simplify name
    rename(beta = `beta[1]`) %>%
    ## Construct realization of random functions
    mutate(`S(t|1)` = pmap(list(alpha, beta), function(a,b) {construct_survival_function(a,b,1)}),
           `S(t|0)` = pmap(list(alpha, beta), function(a,b) {construct_survival_function(a,b,0)}))
exp_surv_model_surv_func
times <- seq(from = 0, to = 165, by = 0.1)
times_df <- data_frame(t = times)
Warning: `data_frame()` was deprecated in tibble 1.1.0.
Please use `tibble()` instead.
This warning is displayed once every 8 hours.
Call `lifecycle::last_lifecycle_warnings()` to see where this warning was generated.
## Try first realizations
exp_surv_model_surv_func$`S(t|1)`[[1]](times[1:10])
 [1] 1.0000000 0.9990954 0.9981915 0.9972885 0.9963863 0.9954849 0.9945844 0.9936846 0.9927857 0.9918876
exp_surv_model_surv_func$`S(t|0)`[[1]](times[1:10])
 [1] 1.0000000 0.9996718 0.9993437 0.9990157 0.9986879 0.9983601 0.9980324 0.9977049 0.9973774 0.9970501
## Apply all realizations
exp_surv_model_survival <-
    exp_surv_model_surv_func %>%
    mutate(times_df = list(times_df)) %>%
    mutate(times_df = pmap(list(times_df, `S(t|1)`, `S(t|0)`),
                           function(df, s1, s0) {df %>% mutate(s1 = s1(t),
                                                               s0 = s0(t))})) %>%
    select(-`S(t|1)`, -`S(t|0)`) %>%
    unnest(cols = c(times_df)) %>%
    gather(key = Strata, value = survival, s1, s0) %>%
    mutate(Strata = factor(Strata, # Strata is whether or not project has major releases
                              levels = c("s1","s0"),
                              labels = c("major releases=No","major releases=Yes")))

## Average on survival scale
exp_surv_model_survival_mean <-
    exp_surv_model_survival %>%
    group_by(Strata, t) %>%
    summarize(survival_mean = mean(survival),
              survival_95upper = quantile(survival, probs = 0.975),
              survival_95lower = quantile(survival, probs = 0.025))
`summarise()` has grouped output by 'Strata'. You can override using the `.groups` argument.
exp_surv_model_survival
# plot the graphs
(ggplot(data = exp_surv_model_survival, mapping = aes(x = t, y = survival, color = Strata, group = interaction(.chain,.draw,Strata))) 
  + theme_survminer()
 + geom_line(size = 0.1, alpha = 0.02) 
 + geom_line(data = exp_surv_model_survival_mean, mapping = aes(y = survival_mean, group = Strata)) 
 + geom_line(data = exp_surv_model_survival_mean, mapping = aes(y = survival_95upper, group = Strata), linetype = "dotted") 
 + geom_line(data = exp_surv_model_survival_mean, mapping = aes(y = survival_95lower, group = Strata), linetype = "dotted")
 + scale_y_continuous(limits=c(0,1))
 + scale_x_continuous(limits=c(0,160), breaks = c(0,40,80,120,160))
 +labs(x = "Time (Months)", y = "Survival probability"))
Warning: Removed 400000 row(s) containing missing values (geom_path).
Warning: Removed 100 row(s) containing missing values (geom_path).
Warning: Removed 100 row(s) containing missing values (geom_path).
Warning: Removed 100 row(s) containing missing values (geom_path).

LS0tCnRpdGxlOiAiQmF5ZXNpYW4gc3Vydml2YWwgYW5hbHlzaXMgdXNpbmcgbWFqb3IgcmVsZWFzZXMgYXMgYSBwcmVkaWN0b3IiCm91dHB1dDogaHRtbF9ub3RlYm9vawotLS0KYGBge3J9CmxpYnJhcnkocnN0YW4pCmxpYnJhcnkoc3Vydml2YWwpCmxpYnJhcnkodGlkeXZlcnNlKQpsaWJyYXJ5KHRpZHliYXllcykKbGlicmFyeShzY2FsZXMpCmxpYnJhcnkoc3Vydm1pbmVyKQpgYGAKIAoKYGBge3J9ClN0YW5fZXhwb25lbnRpYWxfc3Vydml2YWxfbW9kZWwgPC0gIgpkYXRhewogIGludCA8bG93ZXI9MT4gTl91bmNlbnNvcmVkOwogIGludCA8bG93ZXI9MT4gTl9jZW5zb3JlZDsKICBpbnQgPGxvd2VyPTA+IG51bUNvdmFyaWF0ZXM7CiAgbWF0cml4W05fY2Vuc29yZWQsIG51bUNvdmFyaWF0ZXNdIFhfY2Vuc29yZWQ7CiAgbWF0cml4W05fdW5jZW5zb3JlZCwgbnVtQ292YXJpYXRlc10gWF91bmNlbnNvcmVkOwogIHZlY3RvciA8bG93ZXI9MD5bTl9jZW5zb3JlZF0gdGltZXNfY2Vuc29yZWQ7CiAgdmVjdG9yIDxsb3dlcj0wPltOX3VuY2Vuc29yZWRdIHRpbWVzX3VuY2Vuc29yZWQ7Cn0KCnBhcmFtZXRlcnN7CiAgdmVjdG9yW251bUNvdmFyaWF0ZXNdIGJldGE7IC8vcmVncmVzc2lvbiBjb2VmZmljaWVudHMKICByZWFsIGFscGhhOyAvL2ludGVyY2VwdAp9Cgptb2RlbHsKICBiZXRhIH4gbm9ybWFsKDAsMTApOyAvL3ByaW9yIG9uIHJlZ3Jlc3Npb24gY29lZmZpY2llbnRzCiAgYWxwaGEgfiBub3JtYWwoMCwxMCk7IC8vcHJpb3Igb24gaW50ZXJjZXB0CiAgdGFyZ2V0ICs9IGV4cG9uZW50aWFsX2xwZGYodGltZXNfdW5jZW5zb3JlZCB8IGV4cChhbHBoYStYX3VuY2Vuc29yZWQgKiBiZXRhKSk7IC8vbG9nLWxpa2VsaWhvb2QgcGFydCBmb3IgdW5jZW5zb3JlZCB0aW1lcwogIHRhcmdldCArPSBleHBvbmVudGlhbF9sY2NkZih0aW1lc19jZW5zb3JlZCB8IGV4cChhbHBoYStYX2NlbnNvcmVkICogYmV0YSkpOyAvL2xvZy1saWtlbGlob29kIGZvciBjZW5zb3JlZCB0aW1lcwp9CgpnZW5lcmF0ZWQgcXVhbnRpdGllc3sKICB2ZWN0b3JbTl91bmNlbnNvcmVkXSB0aW1lc191bmNlbnNvcmVkX3NhbXBsZWQ7IC8vcHJlZGljdGlvbiBvZiBkZWF0aAogIGZvcihpIGluIDE6Tl91bmNlbnNvcmVkKSB7CiAgICB0aW1lc191bmNlbnNvcmVkX3NhbXBsZWRbaV0gPSBleHBvbmVudGlhbF9ybmcoZXhwKGFscGhhK1hfdW5jZW5zb3JlZFtpLF0qIGJldGEpKTsKICB9Cn0KIgpgYGAKCmBgYHtyfQojIHByZXBhcmUgdGhlIGRhdGEKc2V0LnNlZWQoNDIpOyAKcmVxdWlyZSAodGlkeXZlcnNlKTsKZGF0YSA8LSByZWFkX2NzdignLi4vZGF0YS9uZWNlc3NhcnlfZmllbGRzLmNzdicpCk4gPC0gbnJvdyAoZGF0YSk7CmRhdGEkbWFqb3JfcmVsZWFzZXMgPC0gY2FyOjpyZWNvZGUoZGF0YSRtYWpvcl9yZWxlYXNlcywgIidUUlVFJyA9IDA7ICdGQUxTRScgPSAxIikKZGF0YSRjZW5zb3JlZCA8LSBjYXI6OnJlY29kZShkYXRhJGNlbnNvcmVkLCAiJ1RSVUUnID0gMDsgJ0ZBTFNFJyA9IDEiKQpYIDwtIGFzLm1hdHJpeChwdWxsKGRhdGEsIG1ham9yX3JlbGVhc2VzKSk7IAppc19jZW5zb3JlZCA8LSBwdWxsKGRhdGEsIGNlbnNvcmVkKT09IDA7IAp0aW1lcyA8LSBwdWxsKGRhdGEsIGR1cmF0aW9uX21vbnRocyk7IAptc2tfY2Vuc29yZWQgPC0gaXNfY2Vuc29yZWQgPT0gMTsgCk5fY2Vuc29yZWQgPC0gc3VtKG1za19jZW5zb3JlZCk7CmBgYAoKYGBge3J9ClN0YW5fZGF0YSA8LSBsaXN0IChOX3VuY2Vuc29yZWQgPSBOIC0gTl9jZW5zb3JlZCwgCiAgICAgICAgICAgICAgICAgICAgTl9jZW5zb3JlZCA9IE5fY2Vuc29yZWQsCiAgICAgICAgICAgICAgICAgICAgbnVtQ292YXJpYXRlcyA9IG5jb2woWCksIAogICAgICAgICAgICAgICAgICAgIFhfY2Vuc29yZWQgPSBhcy5tYXRyaXgoWFttc2tfY2Vuc29yZWQsXSksCiAgICAgICAgICAgICAgICAgICAgWF91bmNlbnNvcmVkID0gYXMubWF0cml4KFhbIW1za19jZW5zb3JlZCAsXSksIAogICAgICAgICAgICAgICAgICAgIHRpbWVzX2NlbnNvcmVkID0gdGltZXNbbXNrX2NlbnNvcmVkXSwKICAgICAgICAgICAgICAgICAgICB0aW1lc191bmNlbnNvcmVkID0gdGltZXNbIW1za19jZW5zb3JlZF0pCmBgYAoKYGBge3J9CiMgRml0IFN0YW4gbW9kZWwKcmVxdWlyZShyc3RhbikKZXhwX3N1cnZfbW9kZWxfZml0IDwtIHN1cHByZXNzTWVzc2FnZXMoc3Rhbihtb2RlbF9jb2RlID0gU3Rhbl9leHBvbmVudGlhbF9zdXJ2aXZhbF9tb2RlbCwgZGF0YSA9IFN0YW5fZGF0YSkpCmBgYAoKYGBge3J9CnByaW50KGdldF9zZWVkKGV4cF9zdXJ2X21vZGVsX2ZpdCkpCmBgYAoKYGBge3J9CmZpdF9zdW1tYXJ5IDwtIHN1bW1hcnkoZXhwX3N1cnZfbW9kZWxfZml0KQpwcmludChmaXRfc3VtbWFyeSRzdW1tYXJ5KQpgYGAKCmBgYHtyfQpleHBfc3Vydl9tb2RlbF9kcmF3cyA8LSB0aWR5YmF5ZXM6OnRpZHlfZHJhd3MoZXhwX3N1cnZfbW9kZWxfZml0KQpleHBfc3Vydl9tb2RlbF9kcmF3cwpgYGAKIApgYGB7cn0KIyMgQ29uc3RydWN0b3IgZm9yIFN0cmF0YS1zcGVjaWZpYyBzdXJ2aXZhbCBmdW5jdGlvbgpjb25zdHJ1Y3Rfc3Vydml2YWxfZnVuY3Rpb24gPC0gZnVuY3Rpb24oYWxwaGEsIGJldGEsIHgpIHsKICAgIGZ1bmN0aW9uKHQpIHsKICAgICAgICBsYW1iZGEgPC0gZXhwKGFscGhhICsgeCpiZXRhKQogICAgICAgIGV4cCgtKGxhbWJkYSAqIHQpKQogICAgfQp9CgojIyBSYW5kb20gZnVuY3Rpb25zCmV4cF9zdXJ2X21vZGVsX3N1cnZfZnVuYyA8LQogICAgZXhwX3N1cnZfbW9kZWxfZHJhd3MgJT4lCiAgICBzZWxlY3QoLmNoYWluLCAuaXRlcmF0aW9uLCAuZHJhdywgYWxwaGEsIGBiZXRhWzFdYCkgJT4lCiAgICAjIyBTaW1wbGlmeSBuYW1lCiAgICByZW5hbWUoYmV0YSA9IGBiZXRhWzFdYCkgJT4lCiAgICAjIyBDb25zdHJ1Y3QgcmVhbGl6YXRpb24gb2YgcmFuZG9tIGZ1bmN0aW9ucwogICAgbXV0YXRlKGBTKHR8MSlgID0gcG1hcChsaXN0KGFscGhhLCBiZXRhKSwgZnVuY3Rpb24oYSxiKSB7Y29uc3RydWN0X3N1cnZpdmFsX2Z1bmN0aW9uKGEsYiwxKX0pLAogICAgICAgICAgIGBTKHR8MClgID0gcG1hcChsaXN0KGFscGhhLCBiZXRhKSwgZnVuY3Rpb24oYSxiKSB7Y29uc3RydWN0X3N1cnZpdmFsX2Z1bmN0aW9uKGEsYiwwKX0pKQpleHBfc3Vydl9tb2RlbF9zdXJ2X2Z1bmMKYGBgCgpgYGB7cn0KdGltZXMgPC0gc2VxKGZyb20gPSAwLCB0byA9IDE2NSwgYnkgPSAwLjEpCnRpbWVzX2RmIDwtIGRhdGFfZnJhbWUodCA9IHRpbWVzKQoKIyMgVHJ5IGZpcnN0IHJlYWxpemF0aW9ucwpleHBfc3Vydl9tb2RlbF9zdXJ2X2Z1bmMkYFModHwxKWBbWzFdXSh0aW1lc1sxOjEwXSkKYGBgCgpgYGB7cn0KZXhwX3N1cnZfbW9kZWxfc3Vydl9mdW5jJGBTKHR8MClgW1sxXV0odGltZXNbMToxMF0pCmBgYApgYGB7cn0KIyMgQXBwbHkgYWxsIHJlYWxpemF0aW9ucwpleHBfc3Vydl9tb2RlbF9zdXJ2aXZhbCA8LQogICAgZXhwX3N1cnZfbW9kZWxfc3Vydl9mdW5jICU+JQogICAgbXV0YXRlKHRpbWVzX2RmID0gbGlzdCh0aW1lc19kZikpICU+JQogICAgbXV0YXRlKHRpbWVzX2RmID0gcG1hcChsaXN0KHRpbWVzX2RmLCBgUyh0fDEpYCwgYFModHwwKWApLAogICAgICAgICAgICAgICAgICAgICAgICAgICBmdW5jdGlvbihkZiwgczEsIHMwKSB7ZGYgJT4lIG11dGF0ZShzMSA9IHMxKHQpLAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICBzMCA9IHMwKHQpKX0pKSAlPiUKICAgIHNlbGVjdCgtYFModHwxKWAsIC1gUyh0fDApYCkgJT4lCiAgICB1bm5lc3QoY29scyA9IGModGltZXNfZGYpKSAlPiUKICAgIGdhdGhlcihrZXkgPSBTdHJhdGEsIHZhbHVlID0gc3Vydml2YWwsIHMxLCBzMCkgJT4lCiAgICBtdXRhdGUoU3RyYXRhID0gZmFjdG9yKFN0cmF0YSwgIyBTdHJhdGEgaXMgd2hldGhlciBvciBub3QgcHJvamVjdCBoYXMgbWFqb3IgcmVsZWFzZXMKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgbGV2ZWxzID0gYygiczEiLCJzMCIpLAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICBsYWJlbHMgPSBjKCJtYWpvciByZWxlYXNlcz1ObyIsIm1ham9yIHJlbGVhc2VzPVllcyIpKSkKCiMjIEF2ZXJhZ2Ugb24gc3Vydml2YWwgc2NhbGUKZXhwX3N1cnZfbW9kZWxfc3Vydml2YWxfbWVhbiA8LQogICAgZXhwX3N1cnZfbW9kZWxfc3Vydml2YWwgJT4lCiAgICBncm91cF9ieShTdHJhdGEsIHQpICU+JQogICAgc3VtbWFyaXplKHN1cnZpdmFsX21lYW4gPSBtZWFuKHN1cnZpdmFsKSwKICAgICAgICAgICAgICBzdXJ2aXZhbF85NXVwcGVyID0gcXVhbnRpbGUoc3Vydml2YWwsIHByb2JzID0gMC45NzUpLAogICAgICAgICAgICAgIHN1cnZpdmFsXzk1bG93ZXIgPSBxdWFudGlsZShzdXJ2aXZhbCwgcHJvYnMgPSAwLjAyNSkpCgpleHBfc3Vydl9tb2RlbF9zdXJ2aXZhbApgYGAKCmBgYHtyfQojIHBsb3QgdGhlIGdyYXBocwooZ2dwbG90KGRhdGEgPSBleHBfc3Vydl9tb2RlbF9zdXJ2aXZhbCwgbWFwcGluZyA9IGFlcyh4ID0gdCwgeSA9IHN1cnZpdmFsLCBjb2xvciA9IFN0cmF0YSwgZ3JvdXAgPSBpbnRlcmFjdGlvbiguY2hhaW4sLmRyYXcsU3RyYXRhKSkpIAogICsgdGhlbWVfc3Vydm1pbmVyKCkKICsgZ2VvbV9saW5lKHNpemUgPSAwLjEsIGFscGhhID0gMC4wMikgCiArIGdlb21fbGluZShkYXRhID0gZXhwX3N1cnZfbW9kZWxfc3Vydml2YWxfbWVhbiwgbWFwcGluZyA9IGFlcyh5ID0gc3Vydml2YWxfbWVhbiwgZ3JvdXAgPSBTdHJhdGEpKSAKICsgZ2VvbV9saW5lKGRhdGEgPSBleHBfc3Vydl9tb2RlbF9zdXJ2aXZhbF9tZWFuLCBtYXBwaW5nID0gYWVzKHkgPSBzdXJ2aXZhbF85NXVwcGVyLCBncm91cCA9IFN0cmF0YSksIGxpbmV0eXBlID0gImRvdHRlZCIpIAogKyBnZW9tX2xpbmUoZGF0YSA9IGV4cF9zdXJ2X21vZGVsX3N1cnZpdmFsX21lYW4sIG1hcHBpbmcgPSBhZXMoeSA9IHN1cnZpdmFsXzk1bG93ZXIsIGdyb3VwID0gU3RyYXRhKSwgbGluZXR5cGUgPSAiZG90dGVkIikKICsgc2NhbGVfeV9jb250aW51b3VzKGxpbWl0cz1jKDAsMSkpCiArIHNjYWxlX3hfY29udGludW91cyhsaW1pdHM9YygwLDE2MCksIGJyZWFrcyA9IGMoMCw0MCw4MCwxMjAsMTYwKSkKICtsYWJzKHggPSAiVGltZSAoTW9udGhzKSIsIHkgPSAiU3Vydml2YWwgcHJvYmFiaWxpdHkiKSkKYGBgCg==